2025 iThome 鐵人賽

DAY 17
Software Development

Vibe Unity - Game Development Workflow in the AI Era, Part 17 of the series

Day 17 - Ollama: A Local AI Model API


In the previous article we introduced Groq, a free API platform.
Groq still has its limits, though: rate limits, and the need for an internet connection.
In this article we'll look at a genuinely free and open-source AI tool: Ollama.

https://ollama.com/


Ollama is a tool that runs LLM models directly on your own computer.
No internet connection, no fees; all you need is reasonably capable hardware.
After installing Ollama for the first time, you need to download a large language model (for example with ollama pull <model name> from a terminal); models are usually big, so expect to wait a while.


Once the long download finally finishes,

you can chat with the AI directly on your local machine, no internet connection required:



The local Ollama install also exposes an API we can call,
so let's wire it up to Unity.
That way, players can use the AI features without ever going online
(of course, the player has to install Ollama on their own machine first).
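
Since we can't assume Ollama is already running on the player's machine, it's worth probing the local server before enabling any AI features. Below is a minimal sketch of such a check, assuming Ollama's default local port and its /api/tags endpoint (which lists the locally installed models); the OllamaAvailabilityCheck class and its Check method are hypothetical names for this example, not part of the project's code.

using System;
using System.Collections;
using UnityEngine;
using UnityEngine.Networking;

// Hypothetical helper: checks whether a local Ollama server is reachable.
public static class OllamaAvailabilityCheck
{
    // GET /api/tags lists the installed models; any successful response
    // means the server is up and the player has Ollama running.
    public static IEnumerator Check(string host, Action<bool, string> onResult)
    {
        var url = host.TrimEnd('/') + "/api/tags";
        using var req = UnityWebRequest.Get(url);
        req.timeout = 5;
        yield return req.SendWebRequest();

        var ok = req.result == UnityWebRequest.Result.Success;
        onResult?.Invoke(ok, ok ? req.downloadHandler.text : req.error);
    }
}

Something like StartCoroutine(OllamaAvailabilityCheck.Check("http://localhost:11434", (ok, info) => { ... })) can then decide whether to show the chat UI at all.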


Ollama Core (a thin wrapper exposing a non-streaming ChatOnce coroutine and a streaming ChatStream coroutine):

using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using Newtonsoft.Json;
using PolarAI.Scripts.Core.Ollama.Model;
using UnityEngine;
using UnityEngine.Networking;

namespace PolarAI.Scripts.Core.Ollama
{
    public class OllamaAICore
    {
        private readonly OllamaClientOptions _options;

        public OllamaAICore(OllamaClientOptions options)
        {
            _options = options ?? OllamaClientOptions.DefaultLocal();
            if (string.IsNullOrEmpty(_options.Host))
            {
                _options.Host = _options.Mode == OllamaMode.Turbo ? "https://ollama.com" : "http://localhost:11434";
            }
        }

        private string BuildUrl(string path)
        {
            if (string.IsNullOrEmpty(_options.Host)) return path;
            if (_options.Host.EndsWith("/"))
                return _options.Host.TrimEnd('/') + path;
            return _options.Host + path;
        }

        private UnityWebRequest BuildJsonPost(string url, string json)
        {
            var req = new UnityWebRequest(url, "POST");
            var bodyRaw = Encoding.UTF8.GetBytes(json);
            req.uploadHandler = new UploadHandlerRaw(bodyRaw);
            req.downloadHandler = new DownloadHandlerBuffer();
            req.SetRequestHeader("Content-Type", "application/json");

            if (_options.Mode == OllamaMode.Turbo && !string.IsNullOrEmpty(_options.ApiKey))
            {
                var value = string.IsNullOrEmpty(_options.ApiKeyHeaderPrefix)
                    ? _options.ApiKey
                    : (_options.ApiKeyHeaderPrefix + _options.ApiKey);
                req.SetRequestHeader(_options.ApiKeyHeaderName, value);
            }

            req.timeout = _options.Timeout;
            return req;
        }

        public IEnumerator ChatOnce(string model, List<OllamaMessage> messages,
            Action<string> onCompleted,
            Action<string> onError = null)
        {
            if (string.IsNullOrEmpty(model))
            {
                onError?.Invoke("Model Cannot Be Empty");
                yield break;
            }

            var url = BuildUrl("/api/chat");
            var payload = new OllamaChatRequest
            {
                model = model,
                messages = messages ?? new List<OllamaMessage>(),
                stream = false
            };
            var json = JsonUtility.ToJson(payload);

            using var req = BuildJsonPost(url, json);
            yield return req.SendWebRequest();

            if (req.result != UnityWebRequest.Result.Success)
            {
                onError?.Invoke($"HTTP Error: {req.responseCode} - {req.error}");
                yield break;
            }

            var text = req.downloadHandler.text;
            if (string.IsNullOrEmpty(text))
            {
                onError?.Invoke("Empty response");
                yield break;
            }

            OllamaChatStreamChunk doc = null;
            try
            {
                doc = JsonConvert.DeserializeObject<OllamaChatStreamChunk>(text);
            }
            catch (Exception e)
            {
                onError?.Invoke($"JSON parse failed: {e.Message}\n{text}");
                yield break;
            }

            var content = (doc != null && doc.message != null) ? doc.message.content : null;
            if (string.IsNullOrEmpty(content))
            {
                onError?.Invoke("Response content is empty");
                yield break;
            }

            onCompleted?.Invoke(content);
        }

        public IEnumerator ChatStream(string model, List<OllamaMessage> messages,
            Action<string> onToken,
            Action<string> onCompleted,
            Action<string> onError = null)
        {
            if (string.IsNullOrEmpty(model))
            {
                onError?.Invoke("Model Cannot Be Empty");
                yield break;
            }

            var url = BuildUrl("/api/chat");
            var payload = new OllamaChatRequest
            {
                model = model,
                messages = messages ?? new List<OllamaMessage>(),
                stream = true
            };
            var json = JsonUtility.ToJson(payload);

            var downloadHandler = new StreamingDownloadHandler(
                onLineJson: (line) =>
                {
                    if (string.IsNullOrEmpty(line)) return;

                    OllamaChatStreamChunk chunk = null;
                    try
                    {
                        chunk = JsonUtility.FromJson<OllamaChatStreamChunk>(line);
                    }
                    catch
                    {
                        return;
                    }

                    if (chunk == null) return;

                    if (chunk.message != null && !string.IsNullOrEmpty(chunk.message.content))
                    {
                        onToken?.Invoke(chunk.message.content);
                    }

                    if (chunk.done)
                    {
                        onCompleted?.Invoke(string.Empty);
                    }
                },
                onError: (err) => { onError?.Invoke(err); }
            );

            var req = new UnityWebRequest(url, "POST");
            var bodyRaw = Encoding.UTF8.GetBytes(json);
            req.uploadHandler = new UploadHandlerRaw(bodyRaw);
            req.downloadHandler = downloadHandler;
            req.SetRequestHeader("Content-Type", "application/json");
            req.timeout = _options.Timeout;

            if (_options.Mode == OllamaMode.Turbo && !string.IsNullOrEmpty(_options.ApiKey))
            {
                var value = string.IsNullOrEmpty(_options.ApiKeyHeaderPrefix)
                    ? _options.ApiKey
                    : (_options.ApiKeyHeaderPrefix + _options.ApiKey);
                req.SetRequestHeader(_options.ApiKeyHeaderName, value);
            }

            yield return req.SendWebRequest();

            if (req.result != UnityWebRequest.Result.Success)
            {
                onError?.Invoke($"HTTP Error: {req.responseCode} - {req.error}");
            }

            req.Dispose();
        }

        // Processes the download as newline-delimited JSON, one line at a time:
        // Ollama streams chat replies as one JSON object per line, each carrying
        // a partial message and a done flag marking the final chunk.
        private class StreamingDownloadHandler : DownloadHandlerScript
        {
            private readonly Action<string> _onLineJson;
            private readonly Action<string> _onError;
            private readonly StringBuilder _buffer = new StringBuilder();

            public StreamingDownloadHandler(Action<string> onLineJson, Action<string> onError)
                : base()
            {
                _onLineJson = onLineJson;
                _onError = onError;
            }

            protected override bool ReceiveData(byte[] data, int dataLength)
            {
                if (data == null || dataLength <= 0)
                    return true;

                var text = Encoding.UTF8.GetString(data, 0, dataLength);
                _buffer.Append(text);

                // Split on newlines
                int newlineIndex;
                while ((newlineIndex = _buffer.ToString().IndexOf('\n')) >= 0)
                {
                    var line = _buffer.ToString(0, newlineIndex).Trim();
                    _buffer.Remove(0, newlineIndex + 1);

                    if (!string.IsNullOrEmpty(line))
                    {
                        try
                        {
                            _onLineJson?.Invoke(line);
                        }
                        catch (Exception ex)
                        {
                            _onError?.Invoke($"Error while handling a stream line: {ex.Message}");
                        }
                    }
                }

                return true;
            }

            protected override void CompleteContent()
            {
                // If there is trailing data without a final newline, treat it as one last line
                var remaining = _buffer.ToString().Trim();
                if (!string.IsNullOrEmpty(remaining))
                {
                    try
                    {
                        _onLineJson?.Invoke(remaining);
                    }
                    catch (Exception ex)
                    {
                        _onError?.Invoke($"Error handling remaining data on completion: {ex.Message}");
                    }
                }

                _buffer.Length = 0;
            }

            protected override float GetProgress()
            {
                return base.GetProgress();
            }

            protected override byte[] GetData()
            {
                return null;
            }
        }
    }
}
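
The data classes under PolarAI.Scripts.Core.Ollama.Model aren't shown in this article, so here is a minimal sketch of what they could look like, inferred from how OllamaAICore uses them. The field names follow the JSON that Ollama's /api/chat endpoint expects; the Turbo header defaults and timeout value are assumptions, not the project's actual values.

using System;
using System.Collections.Generic;

namespace PolarAI.Scripts.Core.Ollama.Model
{
    public enum OllamaMode { Local, Turbo }

    // [Serializable] plus public fields so JsonUtility can (de)serialize these.
    [Serializable]
    public class OllamaMessage
    {
        public string role;    // "system", "user" or "assistant"
        public string content;
    }

    [Serializable]
    public class OllamaChatRequest
    {
        public string model;
        public List<OllamaMessage> messages;
        public bool stream;
    }

    [Serializable]
    public class OllamaChatStreamChunk
    {
        public OllamaMessage message; // partial (streaming) or full (non-streaming) reply
        public bool done;             // true on the final streaming chunk
    }

    public class OllamaClientOptions
    {
        public OllamaMode Mode;
        public string Host;
        public string ApiKey;
        public string ApiKeyHeaderName = "Authorization"; // assumed
        public string ApiKeyHeaderPrefix = "Bearer ";      // assumed
        public int Timeout = 120;                           // seconds, assumed

        public static OllamaClientOptions DefaultLocal() =>
            new OllamaClientOptions { Mode = OllamaMode.Local, Host = "http://localhost:11434" };

        public static OllamaClientOptions DefaultTurbo(string apiKey) =>
            new OllamaClientOptions { Mode = OllamaMode.Turbo, Host = "https://ollama.com", ApiKey = apiKey };
    }
}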

Usage (an example MonoBehaviour that drives the core through UGUI):

using System.Collections.Generic;
using PolarAI.Scripts.Core.Ollama.Model;
using UnityEngine;
using UnityEngine.UI;

namespace PolarAI.Scripts.Core.Ollama.Example
{
    public class OllamaExample : MonoBehaviour
    {
        [Header("Mode/Server")]
        public bool useTurbo = false;
        [Tooltip("Local Default:http://localhost:11434")]
        public string localHost = "http://localhost:11434";
        [Tooltip("Turbo Default:https://ollama.com")]
        public string turboHost = "https://ollama.com";

        [Header("Model & APIKey")]
        [Tooltip("Example: gpt-oss:20b or gpt-oss:120b (Turbo)...")]
        public string model = "gpt-oss:120b";
        [Tooltip("Only Required For Turbo Mode")]
        public string turboApiKey = "<YOUR_API_KEY>";

        [Header("Chat Settings")]
        public bool stream = true;
        [TextArea(3, 10)]
        public string userInput = "Why is the sky blue?";

        [Header("Output")]
        [TextArea(10, 20)]
        public string output = "";

        [Header("UGUI References (assign in Inspector)")]
        public InputField userInputField;
        public Text outputText;
        public Button btnSend;
        public Button btnClearOutput;
        public Button btnClearMemory;


        private OllamaAICore _aiCore;
        private readonly List<OllamaMessage> _history = new List<OllamaMessage>();

        private void Awake()
        {
            PushStateToUI();
            WireUiEvents();
            BuildClient();
            if (outputText) outputText.text = output ?? "";
        }

        private void OnValidate()
        {
            if (Application.isPlaying)
            {
                BuildClient();
            }
        }

        private void BuildClient()
        {
            OllamaClientOptions options;
            if (useTurbo)
            {
                options = OllamaClientOptions.DefaultTurbo(turboApiKey);
                if (!string.IsNullOrEmpty(turboHost)) options.Host = turboHost;
            }
            else
            {
                options = OllamaClientOptions.DefaultLocal();
                if (!string.IsNullOrEmpty(localHost)) options.Host = localHost;
            }

            _aiCore = new OllamaAICore(options);
        }

        [ContextMenu("Send Once (non-stream)")]
        public void SendOnce()
        {
            if (_aiCore == null) BuildClient();

            SyncFromUI();
            var messages = new List<OllamaMessage>();
            messages.AddRange(_history);
            messages.Add(new OllamaMessage { role = "user", content = userInput });

            output = "[Sending non-stream request...]\n";
            UpdateOutputUI();

            StopAllCoroutines();
            StartCoroutine(_aiCore.ChatOnce(
                model,
                messages,
                onCompleted: (text) =>
                {
                    output += text + "\n";
                    UpdateOutputUI();
                    _history.Add(new OllamaMessage { role = "user", content = userInput });
                    _history.Add(new OllamaMessage { role = "assistant", content = text });
                },
                onError: (err) =>
                {
                    output += $"[Error] {err}\n";
                    UpdateOutputUI();
                }
            ));
        }

        [ContextMenu("Send Stream")]
        public void SendStream()
        {
            if (_aiCore == null) BuildClient();

            SyncFromUI();
            var messages = new List<OllamaMessage>();
            messages.AddRange(_history);
            messages.Add(new OllamaMessage { role = "user", content = userInput });

            output = "[Streaming...]\n";
            UpdateOutputUI();

            StopAllCoroutines();
            StartCoroutine(_aiCore.ChatStream(
                model,
                messages,
                onToken: (token) =>
                {
                    output += token;
                    UpdateOutputUI();
                },
                onCompleted: (_) =>
                {
                    output += "\n[Done]\n";
                    UpdateOutputUI();
                    // Save the full output back into history (simple approach: the whole accumulated text)
                    _history.Add(new OllamaMessage { role = "user", content = userInput });
                    _history.Add(new OllamaMessage { role = "assistant", content = output });
                },
                onError: (err) =>
                {
                    output += $"\n[Error] {err}\n";
                    UpdateOutputUI();
                }
            ));
        }

        // ============ UGUI Helpers ============
        private void WireUiEvents()
        {
            
            if (userInputField)
            {
                userInputField.text = userInput;
                userInputField.onEndEdit.AddListener(v => { userInput = v; });
            }

            if (btnSend) btnSend.onClick.AddListener(() => { if (stream) SendStream(); else SendOnce(); });
            if (btnClearOutput) btnClearOutput.onClick.AddListener(OnClickClearOutput);
            if (btnClearMemory) btnClearMemory.onClick.AddListener(OnClickClearHistory);
        }

        private void SyncFromUI()
        {
            if (userInputField) userInput = userInputField.text;
            BuildClient();
        }

        private void PushStateToUI()
        {
            if (userInputField) userInputField.text = userInput;
            UpdateOutputUI();
        }

        private void UpdateOutputUI()
        {
            if (outputText) outputText.text = output ?? "";
        }

        // ============ UGUI Buttons ============
        public void OnClickClearOutput()
        {
            output = "";
            UpdateOutputUI();
        }

        public void OnClickClearHistory()
        {
            _history.Clear();
        }
    }
}
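
To try it out: add OllamaExample to a GameObject, assign the InputField, Text and Button references in the Inspector, make sure Ollama is running locally and the model named in the model field has already been downloaded, then enter Play mode and press Send (toggle the stream flag to switch between streaming and one-shot replies).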
